import numpy as np
import tensorflow.compat.v2 as tf
tf.enable_v2_behavior()
import pandas as pd
from tensorflow import keras
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import RobustScaler
from sklearn.preprocessing import MinMaxScaler
from matplotlib import pyplot
import plotly.graph_objects as go
import math
import seaborn as sns
from sklearn.metrics import mean_squared_error
# Fix RNG seeds for reproducibility (NumPy and TensorFlow).
np.random.seed(1)
tf.random.set_seed(1)
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LSTM, GRU, Dropout, RepeatVector, TimeDistributed
from keras import backend
# ---- Experiment hyperparameters ----
MODELFILENAME = 'MODELS/LSTM_3d_TFM_2c'  # output path (without extension) for the saved model
TIME_STEPS=432 #3d - sliding-window length; 432 samples presumably span 3 days — verify sampling rate
CMODEL = LSTM  # recurrent cell class used for both layers (LSTM here; GRU is also imported)
UNITS=43  # hidden units per recurrent layer
DROPOUT1=0.405  # dropout rate after the first recurrent layer
DROPOUT2=0.331  # dropout rate after the second recurrent layer
ACTIVATION='tanh'  # activation of the final TimeDistributed Dense layer
OPTIMIZER='adadelta'  # Keras optimizer name passed to model.compile
EPOCHS=56  # maximum training epochs (early stopping may end sooner)
BATCHSIZE=11  # training batch size
VALIDATIONSPLIT=0.1  # fraction of the training windows held out for validation
# Code to read csv file into Colaboratory:
# from google.colab import files
# uploaded = files.upload()
# import io
# df = pd.read_csv(io.BytesIO(uploaded['SentDATA.csv']))
# Dataset is now stored in a Pandas Dataframe
# Read the dataset and index it by timestamp.
df = pd.read_csv('../../data/dadesTFM.csv')
df.reset_index(inplace=True)  # keeps the original RangeIndex as an 'index' column (why frames below have 7 columns)
df['Time'] = pd.to_datetime(df['Time'])
df = df.set_index('Time')
# Short, space-free names for the six particulate-matter columns.
columns = ['PM1','PM25','PM10','PM1ATM','PM25ATM','PM10ATM']
df1 = df.copy();
df1 = df1.rename(columns={"PM 1":"PM1","PM 2.5":"PM25","PM 10":"PM10","PM 1 ATM":"PM1ATM","PM 2.5 ATM":"PM25ATM","PM 10 ATM":"PM10ATM"})
# Cast each measurement to float32 (smaller memory footprint for training).
df1['PM1'] = df['PM 1'].astype(np.float32)
df1['PM25'] = df['PM 2.5'].astype(np.float32)
df1['PM10'] = df['PM 10'].astype(np.float32)
df1['PM1ATM'] = df['PM 1 ATM'].astype(np.float32)
df1['PM25ATM'] = df['PM 2.5 ATM'].astype(np.float32)
df1['PM10ATM'] = df['PM 10 ATM'].astype(np.float32)
df2 = df1.copy()
# Chronological 80/20 train/test split — no shuffling, this is a time series.
train_size = int(len(df2) * 0.8)
test_size = len(df2) - train_size
train, test = df2.iloc[0:train_size], df2.iloc[train_size:len(df2)]
train.shape, test.shape  # notebook cell expression; pasted output below shows ((3117, 7), (780, 7))
((3117, 7), (780, 7))
#Standardize the data
# Fit a StandardScaler per feature column and scale the training set in place.
# Fixed defects: (1) the notebook paste lost the loop-body indentation, which
# made this a SyntaxError; (2) `train[col] = ...` raised SettingWithCopyWarning
# (visible in the pasted output) because `train` is a slice of df2 — assign via
# .loc as pandas recommends.
# NOTE(review): a fresh scaler is fitted per column and only the last one
# survives the loop; later cells refit scalers on the *test* data instead of
# reusing these — confirm that is intended (train/test leakage smell).
for col in columns:
    scaler = StandardScaler()
    train.loc[:, col] = scaler.fit_transform(train[[col]])
def create_sequences(X, y, time_steps=TIME_STEPS):
    """Build overlapping sliding windows for sequence models.

    Fixed defect: the notebook paste lost the function-body indentation,
    which made this a SyntaxError; the structure is restored here.

    Parameters
    ----------
    X : pandas.DataFrame
        Feature frame; window i is ``X.iloc[i:i+time_steps].values``.
    y : pandas.Series
        Target series; the label for window i is ``y.iloc[i + time_steps]``,
        i.e. the value immediately after the window.
    time_steps : int
        Window length (defaults to the module-level TIME_STEPS).

    Returns
    -------
    (numpy.ndarray, numpy.ndarray)
        ``Xs`` with shape (len(X) - time_steps, time_steps, n_features)
        and ``ys`` with shape (len(X) - time_steps,).
    """
    Xs, ys = [], []
    for i in range(len(X) - time_steps):
        Xs.append(X.iloc[i:(i + time_steps)].values)
        ys.append(y.iloc[i + time_steps])
    return np.array(Xs), np.array(ys)
X_train, y_train = create_sequences(train[[columns[1]]], train[columns[1]])
#X_test, y_test = create_sequences(test[[columns[1]]], test[columns[1]])
print(f'X_train shape: {X_train.shape}')
print(f'y_train shape: {y_train.shape}')
X_train shape: (2685, 432, 1) y_train shape: (2685,)
# add a new metric (translated from Catalan: "afegir nova mètrica")
def rmse(y_true, y_pred):
    """Root-mean-squared-error metric for Keras, computed with backend ops.

    Fixed defect: the notebook paste lost the function-body indentation
    (SyntaxError); the body is restored unchanged.
    """
    # sqrt(mean((y_pred - y_true)^2)) over the last axis, matching the
    # reduction convention of Keras' built-in metrics.
    return backend.sqrt(backend.mean(backend.square(y_pred - y_true), axis=-1))
# Sequence-to-sequence reconstruction model: two stacked recurrent layers
# (CMODEL = LSTM here) with dropout, then a per-timestep Dense(1) head.
model = Sequential()
model.add(CMODEL(units = UNITS, return_sequences=True, input_shape=(X_train.shape[1], X_train.shape[2])))
model.add(Dropout(rate=DROPOUT1))
# Second recurrent layer also returns the full sequence so the
# TimeDistributed head can emit one value per timestep.
model.add(CMODEL(units = UNITS, return_sequences=True))
model.add(Dropout(rate=DROPOUT2))
model.add(TimeDistributed(Dense(1,kernel_initializer='normal',activation=ACTIVATION)))
# MAE is the training loss; MSE and the custom rmse metric are tracked too.
model.compile(optimizer=OPTIMIZER, loss='mae',metrics=['mse',rmse])
model.summary()
Model: "sequential" _________________________________________________________________ Layer (type) Output Shape Param # ================================================================= lstm (LSTM) (None, 432, 43) 7740 _________________________________________________________________ dropout (Dropout) (None, 432, 43) 0 _________________________________________________________________ lstm_1 (LSTM) (None, 432, 43) 14964 _________________________________________________________________ dropout_1 (Dropout) (None, 432, 43) 0 _________________________________________________________________ time_distributed (TimeDistri (None, 432, 1) 44 ================================================================= Total params: 22,748 Trainable params: 22,748 Non-trainable params: 0 _________________________________________________________________
# Train with early stopping on validation loss (patience 5, minimize).
# shuffle=False preserves the chronological order of the windows.
history = model.fit(X_train, y_train, epochs=EPOCHS, batch_size=BATCHSIZE, validation_split=VALIDATIONSPLIT,
callbacks=[keras.callbacks.EarlyStopping(monitor='val_loss', patience=5, mode='min')], shuffle=False)
Epoch 1/56 220/220 [==============================] - 49s 222ms/step - loss: 0.7767 - mse: 0.9693 - rmse: 0.7771 - val_loss: 0.9246 - val_mse: 0.8964 - val_rmse: 0.9247 Epoch 2/56 220/220 [==============================] - 48s 217ms/step - loss: 0.7766 - mse: 0.9691 - rmse: 0.7769 - val_loss: 0.9233 - val_mse: 0.8940 - val_rmse: 0.9233 Epoch 3/56 220/220 [==============================] - 61s 278ms/step - loss: 0.7763 - mse: 0.9686 - rmse: 0.7767 - val_loss: 0.9220 - val_mse: 0.8914 - val_rmse: 0.9220 Epoch 4/56 220/220 [==============================] - 50s 226ms/step - loss: 0.7762 - mse: 0.9683 - rmse: 0.7766 - val_loss: 0.9206 - val_mse: 0.8888 - val_rmse: 0.9206 Epoch 5/56 220/220 [==============================] - 51s 232ms/step - loss: 0.7760 - mse: 0.9679 - rmse: 0.7763 - val_loss: 0.9191 - val_mse: 0.8861 - val_rmse: 0.9191 Epoch 6/56 220/220 [==============================] - 73s 332ms/step - loss: 0.7758 - mse: 0.9676 - rmse: 0.7761 - val_loss: 0.9177 - val_mse: 0.8834 - val_rmse: 0.9177 Epoch 7/56 220/220 [==============================] - 75s 339ms/step - loss: 0.7756 - mse: 0.9672 - rmse: 0.7760 - val_loss: 0.9162 - val_mse: 0.8806 - val_rmse: 0.9162 Epoch 8/56 220/220 [==============================] - 76s 346ms/step - loss: 0.7754 - mse: 0.9669 - rmse: 0.7758 - val_loss: 0.9147 - val_mse: 0.8778 - val_rmse: 0.9147 Epoch 9/56 220/220 [==============================] - 76s 345ms/step - loss: 0.7751 - mse: 0.9664 - rmse: 0.7755 - val_loss: 0.9132 - val_mse: 0.8750 - val_rmse: 0.9132 Epoch 10/56 220/220 [==============================] - 77s 351ms/step - loss: 0.7749 - mse: 0.9661 - rmse: 0.7753 - val_loss: 0.9116 - val_mse: 0.8721 - val_rmse: 0.9116 Epoch 11/56 220/220 [==============================] - 83s 378ms/step - loss: 0.7747 - mse: 0.9658 - rmse: 0.7751 - val_loss: 0.9101 - val_mse: 0.8693 - val_rmse: 0.9101 Epoch 12/56 220/220 [==============================] - 95s 434ms/step - loss: 0.7745 - mse: 0.9654 - rmse: 0.7749 - val_loss: 0.9085 - 
val_mse: 0.8664 - val_rmse: 0.9085 Epoch 13/56 220/220 [==============================] - 90s 409ms/step - loss: 0.7742 - mse: 0.9650 - rmse: 0.7746 - val_loss: 0.9069 - val_mse: 0.8634 - val_rmse: 0.9069 Epoch 14/56 220/220 [==============================] - 85s 387ms/step - loss: 0.7741 - mse: 0.9648 - rmse: 0.7745 - val_loss: 0.9053 - val_mse: 0.8605 - val_rmse: 0.9053 Epoch 15/56 220/220 [==============================] - 84s 383ms/step - loss: 0.7738 - mse: 0.9643 - rmse: 0.7742 - val_loss: 0.9037 - val_mse: 0.8576 - val_rmse: 0.9037 Epoch 16/56 220/220 [==============================] - 84s 382ms/step - loss: 0.7735 - mse: 0.9640 - rmse: 0.7740 - val_loss: 0.9021 - val_mse: 0.8546 - val_rmse: 0.9021 Epoch 17/56 220/220 [==============================] - 79s 358ms/step - loss: 0.7733 - mse: 0.9637 - rmse: 0.7738 - val_loss: 0.9005 - val_mse: 0.8516 - val_rmse: 0.9005 Epoch 18/56 220/220 [==============================] - 80s 362ms/step - loss: 0.7731 - mse: 0.9633 - rmse: 0.7735 - val_loss: 0.8988 - val_mse: 0.8486 - val_rmse: 0.8989 Epoch 19/56 220/220 [==============================] - 79s 361ms/step - loss: 0.7728 - mse: 0.9630 - rmse: 0.7733 - val_loss: 0.8972 - val_mse: 0.8456 - val_rmse: 0.8972 Epoch 20/56 220/220 [==============================] - 79s 359ms/step - loss: 0.7726 - mse: 0.9626 - rmse: 0.7731 - val_loss: 0.8955 - val_mse: 0.8426 - val_rmse: 0.8956 Epoch 21/56 220/220 [==============================] - 78s 355ms/step - loss: 0.7723 - mse: 0.9622 - rmse: 0.7729 - val_loss: 0.8939 - val_mse: 0.8396 - val_rmse: 0.8939 Epoch 22/56 220/220 [==============================] - 80s 365ms/step - loss: 0.7721 - mse: 0.9620 - rmse: 0.7727 - val_loss: 0.8922 - val_mse: 0.8366 - val_rmse: 0.8922 Epoch 23/56 220/220 [==============================] - 74s 339ms/step - loss: 0.7719 - mse: 0.9617 - rmse: 0.7725 - val_loss: 0.8905 - val_mse: 0.8335 - val_rmse: 0.8906 Epoch 24/56 220/220 [==============================] - 50s 226ms/step - loss: 0.7716 - mse: 
0.9612 - rmse: 0.7722 - val_loss: 0.8889 - val_mse: 0.8305 - val_rmse: 0.8889 Epoch 25/56 220/220 [==============================] - 49s 221ms/step - loss: 0.7715 - mse: 0.9611 - rmse: 0.7721 - val_loss: 0.8872 - val_mse: 0.8275 - val_rmse: 0.8872 Epoch 26/56 220/220 [==============================] - 57s 260ms/step - loss: 0.7712 - mse: 0.9607 - rmse: 0.7718 - val_loss: 0.8855 - val_mse: 0.8245 - val_rmse: 0.8856 Epoch 27/56 220/220 [==============================] - 56s 255ms/step - loss: 0.7709 - mse: 0.9603 - rmse: 0.7716 - val_loss: 0.8838 - val_mse: 0.8214 - val_rmse: 0.8839 Epoch 28/56 220/220 [==============================] - 52s 234ms/step - loss: 0.7707 - mse: 0.9600 - rmse: 0.7714 - val_loss: 0.8822 - val_mse: 0.8184 - val_rmse: 0.8822 Epoch 29/56 220/220 [==============================] - 53s 243ms/step - loss: 0.7705 - mse: 0.9599 - rmse: 0.7713 - val_loss: 0.8805 - val_mse: 0.8154 - val_rmse: 0.8805 Epoch 30/56 220/220 [==============================] - 48s 220ms/step - loss: 0.7702 - mse: 0.9595 - rmse: 0.7710 - val_loss: 0.8788 - val_mse: 0.8124 - val_rmse: 0.8789 Epoch 31/56 220/220 [==============================] - 47s 213ms/step - loss: 0.7700 - mse: 0.9592 - rmse: 0.7708 - val_loss: 0.8771 - val_mse: 0.8094 - val_rmse: 0.8772 Epoch 32/56 220/220 [==============================] - 55s 248ms/step - loss: 0.7698 - mse: 0.9588 - rmse: 0.7706 - val_loss: 0.8755 - val_mse: 0.8064 - val_rmse: 0.8755 Epoch 33/56 220/220 [==============================] - 61s 279ms/step - loss: 0.7695 - mse: 0.9586 - rmse: 0.7704 - val_loss: 0.8738 - val_mse: 0.8033 - val_rmse: 0.8738 Epoch 34/56 220/220 [==============================] - 45s 206ms/step - loss: 0.7693 - mse: 0.9583 - rmse: 0.7702 - val_loss: 0.8721 - val_mse: 0.8003 - val_rmse: 0.8722 Epoch 35/56 220/220 [==============================] - 46s 210ms/step - loss: 0.7691 - mse: 0.9581 - rmse: 0.7700 - val_loss: 0.8704 - val_mse: 0.7973 - val_rmse: 0.8705 Epoch 36/56 220/220 
[==============================] - 46s 209ms/step - loss: 0.7688 - mse: 0.9577 - rmse: 0.7698 - val_loss: 0.8687 - val_mse: 0.7943 - val_rmse: 0.8688 Epoch 37/56 220/220 [==============================] - 47s 213ms/step - loss: 0.7686 - mse: 0.9575 - rmse: 0.7696 - val_loss: 0.8670 - val_mse: 0.7913 - val_rmse: 0.8671 Epoch 38/56 220/220 [==============================] - 54s 244ms/step - loss: 0.7684 - mse: 0.9572 - rmse: 0.7695 - val_loss: 0.8653 - val_mse: 0.7883 - val_rmse: 0.8654 Epoch 39/56 220/220 [==============================] - 58s 262ms/step - loss: 0.7682 - mse: 0.9569 - rmse: 0.7693 - val_loss: 0.8636 - val_mse: 0.7853 - val_rmse: 0.8637 Epoch 40/56 220/220 [==============================] - 50s 226ms/step - loss: 0.7679 - mse: 0.9566 - rmse: 0.7691 - val_loss: 0.8619 - val_mse: 0.7822 - val_rmse: 0.8620 Epoch 41/56 220/220 [==============================] - 51s 230ms/step - loss: 0.7677 - mse: 0.9564 - rmse: 0.7689 - val_loss: 0.8602 - val_mse: 0.7792 - val_rmse: 0.8603 Epoch 42/56 220/220 [==============================] - 51s 230ms/step - loss: 0.7675 - mse: 0.9560 - rmse: 0.7687 - val_loss: 0.8585 - val_mse: 0.7762 - val_rmse: 0.8586 Epoch 43/56 220/220 [==============================] - 54s 244ms/step - loss: 0.7672 - mse: 0.9558 - rmse: 0.7685 - val_loss: 0.8568 - val_mse: 0.7732 - val_rmse: 0.8569 Epoch 44/56 220/220 [==============================] - 78s 355ms/step - loss: 0.7670 - mse: 0.9556 - rmse: 0.7683 - val_loss: 0.8550 - val_mse: 0.7701 - val_rmse: 0.8552 Epoch 45/56 220/220 [==============================] - 81s 370ms/step - loss: 0.7668 - mse: 0.9553 - rmse: 0.7681 - val_loss: 0.8533 - val_mse: 0.7671 - val_rmse: 0.8534 Epoch 46/56 220/220 [==============================] - 84s 381ms/step - loss: 0.7666 - mse: 0.9551 - rmse: 0.7680 - val_loss: 0.8516 - val_mse: 0.7640 - val_rmse: 0.8517 Epoch 47/56 220/220 [==============================] - 83s 375ms/step - loss: 0.7664 - mse: 0.9548 - rmse: 0.7678 - val_loss: 0.8498 - val_mse: 
0.7610 - val_rmse: 0.8500 Epoch 48/56 220/220 [==============================] - 82s 373ms/step - loss: 0.7661 - mse: 0.9546 - rmse: 0.7676 - val_loss: 0.8481 - val_mse: 0.7579 - val_rmse: 0.8482 Epoch 49/56 220/220 [==============================] - 83s 377ms/step - loss: 0.7659 - mse: 0.9543 - rmse: 0.7675 - val_loss: 0.8463 - val_mse: 0.7549 - val_rmse: 0.8465 Epoch 50/56 220/220 [==============================] - 83s 377ms/step - loss: 0.7657 - mse: 0.9541 - rmse: 0.7673 - val_loss: 0.8445 - val_mse: 0.7518 - val_rmse: 0.8447 Epoch 51/56 220/220 [==============================] - 82s 372ms/step - loss: 0.7654 - mse: 0.9538 - rmse: 0.7671 - val_loss: 0.8428 - val_mse: 0.7488 - val_rmse: 0.8429 Epoch 52/56 220/220 [==============================] - 80s 364ms/step - loss: 0.7652 - mse: 0.9537 - rmse: 0.7670 - val_loss: 0.8410 - val_mse: 0.7457 - val_rmse: 0.8412 Epoch 53/56 220/220 [==============================] - 79s 361ms/step - loss: 0.7649 - mse: 0.9533 - rmse: 0.7667 - val_loss: 0.8392 - val_mse: 0.7426 - val_rmse: 0.8394 Epoch 54/56 220/220 [==============================] - 80s 363ms/step - loss: 0.7648 - mse: 0.9531 - rmse: 0.7666 - val_loss: 0.8374 - val_mse: 0.7396 - val_rmse: 0.8376 Epoch 55/56 220/220 [==============================] - 81s 366ms/step - loss: 0.7646 - mse: 0.9529 - rmse: 0.7665 - val_loss: 0.8357 - val_mse: 0.7365 - val_rmse: 0.8359 Epoch 56/56 220/220 [==============================] - 80s 363ms/step - loss: 0.7644 - mse: 0.9528 - rmse: 0.7663 - val_loss: 0.8339 - val_mse: 0.7335 - val_rmse: 0.8341
import matplotlib.pyplot as plt
# Plot every tracked training/validation metric from the fit history
# on a single axis for a quick convergence check.
plt.plot(history.history['loss'], label='MAE Training loss')
plt.plot(history.history['val_loss'], label='MAE Validation loss')
plt.plot(history.history['mse'], label='MSE Training loss')
plt.plot(history.history['val_mse'], label='MSE Validation loss')
plt.plot(history.history['rmse'], label='RMSE Training loss')
plt.plot(history.history['val_rmse'], label='RMSE Validation loss')
plt.legend();
# Reconstruct the training windows and inspect the distribution of the
# per-window MAE reconstruction error.
X_train_pred = model.predict(X_train, verbose=0)
train_mae_loss = np.mean(np.abs(X_train_pred - X_train), axis=1)  # mean over the time axis
plt.hist(train_mae_loss, bins=50)
plt.xlabel('Train MAE loss')
plt.ylabel('Number of Samples');
def evaluate_prediction(predictions, actual, model_name):
    """Print and return the MAE, RMSE and MSE of predictions vs. actual.

    Fixed defect: the notebook paste lost the function-body indentation
    (SyntaxError); the statements are restored unchanged.

    Parameters
    ----------
    predictions, actual : array-like (numpy broadcastable)
        Predicted and ground-truth values; errors = predictions - actual.
    model_name : str
        Label printed before the metric lines.

    Returns
    -------
    tuple of float
        (mae, rmse, mse).
    """
    errors = predictions - actual
    mse = np.square(errors).mean()
    rmse = np.sqrt(mse)
    mae = np.abs(errors).mean()
    print(model_name + ':')
    print('Mean Absolute Error: {:.4f}'.format(mae))
    print('Root Mean Square Error: {:.4f}'.format(rmse))
    print('Mean Square Error: {:.4f}'.format(mse))
    print('')
    return mae,rmse,mse
# Summarize the training-set reconstruction error.
# NOTE(review): this rebinds `rmse` at module level, shadowing the rmse()
# metric function defined earlier — reloading the saved model with
# custom_objects={'rmse': rmse} would pick up the float, not the function.
mae,rmse,mse = evaluate_prediction(X_train_pred, X_train,"LSTM")
LSTM: Mean Absolute Error: 0.7675 Root Mean Square Error: 0.9807 Mean Square Error: 0.9617
# Persist the trained model in HDF5 format.
model.save(MODELFILENAME+'.h5')
# compute the test anomaly threshold (translated: "càlcul del threshold de test")
def calculate_threshold(X_test, X_test_pred):
    """Return the anomaly threshold: the row-wise RMSE at the 85% cutoff.

    Fixed defects: the notebook paste lost the function-body indentation
    (SyntaxError), and the old inline comment said the cutoff was 0.80
    while the code uses 0.85.

    Parameters
    ----------
    X_test, X_test_pred : numpy.ndarray
        Actual and predicted values of the same shape; the RMSE is taken
        over axis 1, yielding one distance per row.

    Returns
    -------
    The distance found 85% of the way up the sorted distances; rows with a
    larger loss are later flagged as anomalies.
    """
    distance = np.sqrt(np.mean(np.square(X_test_pred - X_test), axis=1))
    # Sort a copy (np.sort) rather than in place, then pick the value at the
    # 0.85 cutoff position.
    sorted_distance = np.sort(distance, axis=0)
    cut_off = int(0.85 * len(sorted_distance))
    threshold = sorted_distance[cut_off]
    return threshold
# Per-column test-set evaluation and anomaly detection.
# Fixed defects: (1) the notebook paste lost all loop-body indentation, so
# this block was a SyntaxError — the structure is reconstructed here;
# (2) the local name `eval` shadowed the builtin — renamed; (3) the scaled
# assignment uses .loc to address the SettingWithCopyWarning visible in the
# pasted output. Catalan comments are translated; behavior is unchanged.
for col in columns:
    print ("####################### "+col +" ###########################")
    #Standardize the test data
    # NOTE(review): the scaler is fitted on the *test* data rather than
    # reusing the scaler fitted on the training set — confirm intended.
    scaler = StandardScaler()
    test_cpy = test.copy()  # NOTE(review): copied but never used afterwards
    test.loc[:, col] = scaler.fit_transform(test[[col]])
    # Build windowed sequences for this column.
    X_test1, y_test1 = create_sequences(test[[col]], test[col])
    print(f'Testing shape: {X_test1.shape}')
    # Evaluate the model (returns [loss, mse, rmse]).
    eval_results = model.evaluate(X_test1, y_test1)
    print("evaluate: ",eval_results)
    # Predict and report error statistics.
    X_test1_pred = model.predict(X_test1, verbose=0)
    evaluate_prediction(X_test1_pred, X_test1,"LSTM")
    # Per-window MAE and RMSE reconstruction losses (mean over time axis).
    test1_mae_loss = np.mean(np.abs(X_test1_pred - X_test1), axis=1)
    test1_rmse_loss = np.sqrt(np.mean(np.square(X_test1_pred - X_test1),axis=1))
    # Flatten (windows, time, 1) -> (windows*time, 1) for the threshold calc.
    X_test1_predReshape = X_test1_pred.reshape((X_test1_pred.shape[0] * X_test1_pred.shape[1]), X_test1_pred.shape[2])
    X_test1Reshape = X_test1.reshape((X_test1.shape[0] * X_test1.shape[1]), X_test1.shape[2])
    threshold_test = calculate_threshold(X_test1Reshape,X_test1_predReshape)
    # Score frame: one row per window label (the first TIME_STEPS rows of
    # `test` have no window ending on them).
    test1_score_df = pd.DataFrame(test[TIME_STEPS:])
    test1_score_df['loss'] = test1_rmse_loss.reshape((-1))
    test1_score_df['threshold'] = threshold_test
    test1_score_df['anomaly'] = test1_score_df['loss'] > test1_score_df['threshold']
    test1_score_df[col] = test[TIME_STEPS:][col]
    # Plot test loss against the threshold.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['loss'], name='Test loss'))
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=test1_score_df['threshold'], name='Threshold'))
    fig.update_layout(showlegend=True, title='Test loss vs. Threshold')
    fig.show()
    # Collect the rows flagged as anomalies.
    anomalies1 = test1_score_df.loc[test1_score_df['anomaly'] == True]
    anomalies1.shape
    print('anomalies: ',anomalies1.shape); print();
    # Plot the inverse-transformed series with the detected anomalies, to
    # verify the normalization does not distort the data.
    fig = go.Figure()
    fig.add_trace(go.Scatter(x=test1_score_df.index, y=scaler.inverse_transform(test1_score_df[col]), name=col))
    fig.add_trace(go.Scatter(x=anomalies1.index, y=scaler.inverse_transform(anomalies1[col]), mode='markers', name='Anomaly'))
    fig.update_layout(showlegend=True, title='Detected anomalies')
    fig.show()
    print ("######################################################")
####################### PM1 ########################### Testing shape: (348, 432, 1)
<ipython-input-17-48420fb1aa44>:8: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy test[col] = scaler.fit_transform(test[[col]])
11/11 [==============================] - 1s 80ms/step - loss: 0.7149 - mse: 1.0000 - rmse: 0.7168 evaluate: [0.71494460105896, 1.0000014305114746, 0.716768741607666] LSTM: Mean Absolute Error: 0.6427 Root Mean Square Error: 0.8904 Mean Square Error: 0.7928
anomalies: (159, 10)
###################################################### ####################### PM25 ########################### Testing shape: (348, 432, 1)
<ipython-input-17-48420fb1aa44>:8: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
11/11 [==============================] - 1s 83ms/step - loss: 0.7547 - mse: 1.1158 - rmse: 0.7565 evaluate: [0.7547376751899719, 1.1158063411712646, 0.7564548850059509] LSTM: Mean Absolute Error: 0.6743 Root Mean Square Error: 0.9096 Mean Square Error: 0.8274
anomalies: (8, 10)
###################################################### ####################### PM10 ########################### Testing shape: (348, 432, 1)
<ipython-input-17-48420fb1aa44>:8: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
11/11 [==============================] - 1s 86ms/step - loss: 0.7860 - mse: 1.1903 - rmse: 0.7878 evaluate: [0.7860275506973267, 1.1903181076049805, 0.7877548336982727] LSTM: Mean Absolute Error: 0.7005 Root Mean Square Error: 0.9219 Mean Square Error: 0.8499
anomalies: (3, 10)
###################################################### ####################### PM1ATM ########################### Testing shape: (348, 432, 1)
<ipython-input-17-48420fb1aa44>:8: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
11/11 [==============================] - 1s 80ms/step - loss: 0.7851 - mse: 1.0386 - rmse: 0.7868 evaluate: [0.7851307988166809, 1.0386375188827515, 0.7868419885635376] LSTM: Mean Absolute Error: 0.7089 Root Mean Square Error: 0.9129 Mean Square Error: 0.8334
anomalies: (0, 10)
###################################################### ####################### PM25ATM ########################### Testing shape: (348, 432, 1)
<ipython-input-17-48420fb1aa44>:8: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
11/11 [==============================] - 1s 80ms/step - loss: 0.7738 - mse: 1.0235 - rmse: 0.7755 evaluate: [0.7737583518028259, 1.0234893560409546, 0.7754641175270081] LSTM: Mean Absolute Error: 0.6994 Root Mean Square Error: 0.9055 Mean Square Error: 0.8199
anomalies: (0, 10)
###################################################### ####################### PM10ATM ########################### Testing shape: (348, 432, 1)
<ipython-input-17-48420fb1aa44>:8: SettingWithCopyWarning: A value is trying to be set on a copy of a slice from a DataFrame. Try using .loc[row_indexer,col_indexer] = value instead See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy
11/11 [==============================] - 1s 81ms/step - loss: 0.7662 - mse: 1.0389 - rmse: 0.7681 evaluate: [0.7662487030029297, 1.0388792753219604, 0.7680541276931763] LSTM: Mean Absolute Error: 0.6910 Root Mean Square Error: 0.9065 Mean Square Error: 0.8217
anomalies: (0, 10)
######################################################